EchoSpike Predictive Plasticity¶

In [ ]:
import matplotlib.pyplot as plt
import os
from utils import get_accuracy, get_samples, train_out_proj_fast, train_out_proj_closed_form
from main import Args
from data import load_SHD
from model import EchoSpike, simple_out
import numpy as np
from data import augment_shd
import torch
import seaborn as sns
from scipy.signal import savgol_filter
from tqdm.notebook import trange
plt.rcParams['figure.dpi'] = 600
import pickle
torch.manual_seed(0)
color_list = sns.color_palette('muted')
device = 'cpu'
batch_size = 64
folder = 'models/'
model_name = folder + 'shd_1layer_large.pt'
with open(model_name[:-3] + '_args.pkl', 'rb') as f:
    args = pickle.load(f)
# args = Args()
online = args.online
print(vars(args))
{'model_name': 'shd_1layer_large', 'dataset': 'shd', 'online': True, 'device': 'cuda', 'recurrency_type': 'none', 'lr': 0.0001, 'epochs': 1000, 'augment': True, 'batch_size': 128, 'n_hidden': [1332], 'inp_thr': 0.05, 'c_y': [1.5, -1.5], 'n_inputs': 700, 'n_outputs': 20, 'n_time_bins': 100, 'beta': 0.95}

Dataset¶

Spiking Heidelberg Digits (SHD): spoken digits 0-9 in English and German (20 classes), encoded as spike trains over 700 cochlea-model input channels.

In [ ]:
n_time_bins = 100
train_loader, test_loader = load_SHD(batch_size=batch_size)
# Plot Example(s)
for i in range(1):
    frames, target = train_loader.next_item(-1, contrastive=True)
    plt.figure(figsize=(10, 10))
    plt.axis('off')
    plt.imshow(frames.squeeze(1).T)
    # plt.colorbar()
    print(frames.shape, target)
plt.axis('on')
torch.Size([100, 1, 700]) tensor([4.])
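The loader's next_item interface drives the self-supervised pairing: passing a label with contrastive=True presumably draws a sample independent of (or different from) that label, while contrastive=False requests a sample of the same class. A minimal sketch of drawing a same-class ("predictive") pair, using only the calls that appear later in this notebook; the variable names are illustrative:

In [ ]:
# draw an anchor sample, then request a same-class partner for it
anchor, anchor_label = train_loader.next_item(-1, contrastive=True)
partner, partner_label = train_loader.next_item(anchor_label, contrastive=False)
print(anchor_label, partner_label)  # the two labels should match in the predictive case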

Load pretrained model¶

In [ ]:
SNN = EchoSpike(args.n_inputs, args.n_hidden, beta=args.beta, c_y=args.c_y, device=device, recurrency_type=args.recurrency_type, online=args.online).to(device)
SNN.load_state_dict(torch.load(model_name, map_location=device))
# train(SNN, train_loader, args.epochs, device, args.model_name,
#       batch_size=args.batch_size, online=args.online, lr=1e-8, augment=args.augment)
from_epoch = 0
echo_train_loss = torch.load(model_name[:-3]+'_loss_hist.pt', map_location='cpu')[int(from_epoch*len(train_loader)/args.batch_size):]
print(echo_train_loss.shape)
for i in range(echo_train_loss.shape[-1]):
    plt.plot(from_epoch+(args.batch_size*np.arange(echo_train_loss.shape[0])/len(train_loader)), savgol_filter(echo_train_loss[:,i], 99, 1), color=color_list[i])
plt.legend([f'layer {i+1}' for i in range(len(SNN.layers))])
# no y ticks, because it's not really meaningful
plt.yticks([])
plt.xlabel('Epoch')
plt.ylabel('EchoSpike Loss')
torch.Size([63719, 1])
In [ ]:
# plotting adaptive threshold and update rate for an example
init_echo, label_0 = train_loader.next_item(-1, contrastive=True)
sample_1, label_1 = train_loader.next_item(-1, contrastive=True)
sample_2, label_2 = train_loader.next_item(label_1, contrastive=False)
print(label_0, label_1, label_2)
SNN.eval()
with torch.no_grad():
    # feed first sample to get initial activity
    for t in range(100):
        inp_activity = init_echo[t].mean(axis=-1)
        SNN(init_echo[t], torch.tensor(-1, device=device), inp_activity=inp_activity)
    SNN.reset(-1)
    # feed second sample to get the update rates and thresholds for contrastive case
    contrastive_thresholds = torch.zeros(100)
    contrastive_temp_sim = torch.zeros((len(SNN.layers), 100))
    for t in range(100):
        inp_activity = sample_1[t].mean(axis=-1)
        out_spk, mems, losses = SNN(sample_1[t], torch.tensor(-1, device=device), inp_activity=inp_activity)
        contrastive_thresholds[t] = inp_activity * args.c_y[1]
        contrastive_temp_sim[:, t] = losses
    SNN.reset(-1)
    # feed third sample to get the update rates and thresholds for predictive case
    predictive_thresholds = torch.zeros(100)
    predictive_temp_sim = torch.zeros((len(SNN.layers), 100))
    for t in range(100):
        inp_activity = sample_2[t].mean(axis=-1)
        out_spk, mems, losses = SNN(sample_2[t], torch.tensor(1, device=device), inp_activity=inp_activity)  # feed the same-class sample (predictive pair)
        predictive_thresholds[t] = inp_activity * args.c_y[0]
        predictive_temp_sim[:, t] = -losses
    SNN.reset(1)
    # plot thresholds, with sample as background
    layer = -1  # layer to plot (the loaded model has a single hidden layer)
    fig, ax = plt.subplots(figsize=(10, 5))
    ax2 = ax.twinx()
    # imshow in background
    ax.imshow(sample_1.squeeze(1).T, aspect='auto', cmap='Reds')
    ax2.plot(-contrastive_temp_sim[layer], color='r', label='Negative Similarity Score')
    ax2.plot(contrastive_thresholds, color='r', linestyle='--', label='Contrastive Threshold')
    ax2.hlines(args.inp_thr*args.c_y[1], 0, 100, color='r', linestyle=':', label='Input Threshold (times c(-1))')
    # highlight regions where the thresholds are crossed
    argwhere = np.argwhere(np.logical_and((-contrastive_temp_sim[layer] < contrastive_thresholds).numpy(), contrastive_thresholds.numpy() < args.inp_thr*args.c_y[1])).flatten()
    for t0 in argwhere:
        ax2.axvspan(t0, t0+1, color='r', alpha=0.2, lw=0)  # pass scalars to avoid the deprecated ragged-array path

    ax.yaxis.set_visible(False)
    ax2.spines['right'].set_visible(False)
    ax2.yaxis.tick_left()
    ax2.yaxis.set_label_position('left')
    ax2.set_xlim(ax.get_xlim())
    # get rid of right margin
    ax2.margins(0)
    ax.set_xlabel('Timesteps')
    plt.ylabel('Thresholds & Similarity Score')
    plt.xlim(0, 100)
    plt.legend()
    # same for predictive
    fig, ax = plt.subplots(figsize=(10, 5))
    ax2 = ax.twinx()
    # imshow in background
    ax.imshow(sample_2.squeeze(1).T, aspect='auto', cmap='Blues')
    ax2.plot(predictive_temp_sim[layer], color='b', label='Similarity Score')
    ax2.plot(predictive_thresholds, color='b', linestyle='--', label='Predictive Threshold')
    ax2.hlines(args.inp_thr*args.c_y[0], 0, 100, color='b', linestyle=':', label='Input Threshold (times c(1))')
    # highlight regions where the thresholds are crossed
    argwhere = np.argwhere(np.logical_and((predictive_temp_sim[layer] < predictive_thresholds).numpy(), predictive_thresholds.numpy() > args.inp_thr*args.c_y[0])).flatten()
    for t0 in argwhere:
        ax2.axvspan(t0, t0+1, color='b', alpha=0.1, lw=0)
    ax.yaxis.set_visible(False)
    ax2.spines['right'].set_visible(False)
    ax2.yaxis.tick_left()
    ax2.yaxis.set_label_position('left')
    ax2.set_xlim(ax.get_xlim())
    # get rid of right margin
    #ax2.margins(0)
    ax.set_xlabel('Timesteps')
    plt.ylabel('Thresholds & Similarity Score')
    plt.xlim(0, 100)
    plt.legend()
    plt.show()
tensor([9.]) tensor([7.]) tensor([7.])
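Both shaded regions follow one rule, read directly from the plotting code above: the adaptive threshold at time t is the mean input activity scaled by c(y) (here c(1) = 1.5, c(-1) = -1.5), and a timestep is highlighted when the plotted score still lies below that threshold while the input activity exceeds the fixed threshold inp_thr (for either sign of c(y), the second condition reduces to inp_activity > inp_thr). A small helper reproducing the highlighting condition for a single timestep; the function name is illustrative, not part of the model:

In [ ]:
def update_active(score, inp_activity, y, c_y=args.c_y, inp_thr=args.inp_thr):
    # score is the plotted quantity: negative similarity for the contrastive
    # case (y = -1), similarity for the predictive case (y = +1)
    c = c_y[0] if y == 1 else c_y[1]
    below_target = score < inp_activity * c   # score has not yet reached c(y) * activity
    active_enough = inp_activity > inp_thr    # input is active enough to matter
    return below_target and active_enough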

Analyze Weights Directly¶

In [ ]:
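# compose per-layer weights into effective input-space weights: W_eff[l] = W[l] @ W_eff[l-1]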
layers = [SNN.layers[0].fc.weight[:,:args.n_inputs]]
for i in range(1, len(SNN.layers)):
    layers.append(SNN.layers[i].fc.weight[:,:args.n_hidden[i-1]] @ layers[-1])

for i in range(len(SNN.layers)):
    plt.figure()
    plt.imshow(SNN.layers[i].fc.weight.detach(), cmap='viridis')
    plt.colorbar()
    # plt.figure()
    # plt.imshow(SNN.layers[i].pred.weight.detach(), vmax=0.5, vmin=-0.5)
    # plt.colorbar()
for lay in layers:
    plt.figure()
    plt.imshow(lay.detach())
    plt.colorbar()
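Each row of the composed matrices is a neuron's effective linear receptive field over the 700 input channels (spiking nonlinearities and dynamics ignored). A quick way to inspect a few individual neurons; the indices are arbitrary:

In [ ]:
# effective input-space receptive fields of a few hidden neurons (linear part only)
for j in [0, 1, 2]:
    plt.plot(layers[-1][j].detach(), label=f'neuron {j}')
plt.xlabel('Input channel')
plt.ylabel('Effective weight')
plt.legend()
plt.show()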

Train Output Projection¶

In [ ]:
from tqdm.notebook import tqdm
def train_out_proj(epochs, batch, cat, out_projs=None):
    # train output projections from all layers (and no layer)
    losses_out = []
    beta = 1.0
    lr = 1e-4
    augment = True
    optimizers = []
    print_interval = 10*batch
    if out_projs is None:
        out_projs = []
        out_proj_0 = simple_out(args.n_inputs, args.n_outputs, beta=beta)
    else:
        for out_p in out_projs:
            out_p.train()
            out_p.reset()
        out_proj_0 = out_projs[0]
        out_projs = out_projs[1:]
    optim_0 = torch.optim.Adam(out_proj_0.parameters(), lr=lr)
    for lay in range(len(SNN.layers)):
        if len(out_projs) <= lay:
            if cat:
                out_projs.append(simple_out(sum(args.n_hidden[:lay+1])+args.n_inputs, args.n_outputs, beta=beta))
            else:
                out_projs.append(simple_out(args.n_hidden[lay], args.n_outputs, beta=beta))
        optimizers.append(torch.optim.Adam(out_projs[lay].parameters(), lr=lr))
        optimizers[-1].zero_grad()
    SNN.eval()
    acc = []
    target = batch_size*[-1]
    correct = (len(SNN.layers) + 1)*[0]
    with torch.no_grad():
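        # note: no backward() is called anywhere in this loop — simple_out presumably
        # accumulates its own (local) gradients, which the Adam steps below then apply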
        pbar = tqdm(total=len(train_loader)*epochs)
        while len(losses_out)*batch < len(train_loader)*epochs:
            data, target = train_loader.next_item(target, contrastive=True)
            SNN.reset(0)
            logit_lists = [[] for _ in range(len(SNN.layers)+1)]
            data = data.squeeze()
            if augment:
                data = augment_shd(data)
            for step in range(data.shape[0]):
                data_step = data[step].float().to(device)
                target = target.to(device)
                logits, _, _ = SNN(data_step, 0)
                if step == args.n_time_bins-1:
                    _, logts = out_proj_0(data_step, target)
                    logit_lists[0] = logts
                    for lay in range(len(SNN.layers)):
                        if cat:
                            data_step = torch.cat([data_step, logits[lay]], dim=-1)
                        else:
                            data_step = logits[lay]
                        _, logts = out_projs[lay](data_step, target)
                        logit_lists[lay+1] = logts
                else:
                    out_proj_0(data_step, None)
                    for lay in range(len(SNN.layers)):
                        if cat:
                            data_step = torch.cat([data_step, logits[lay]], dim=-1)
                        else:
                            data_step = logits[lay]
                        out_projs[lay](data_step, None)
            
            preds = [logit_lists[lay].argmax(axis=-1) for lay in range(len(SNN.layers)+1)]
            correct = [correct[lay] + (preds[lay] == target).sum() for lay in range(len(SNN.layers)+1)]
            out_proj_0.reset()
            for out_proj in out_projs:
                out_proj.reset()

            losses_out.append(torch.tensor([torch.nn.functional.cross_entropy(logit_lists[lay], target.squeeze().long()) for lay in range(len(SNN.layers)+1)], requires_grad=False))

            optim_0.step()
            optim_0.zero_grad()
            for opt in optimizers:
                opt.step()
                opt.zero_grad()
            
            if len(losses_out)*batch % print_interval == 0:
                pbar.write(f'Cross Entropy Loss: {(torch.stack(losses_out)[-print_interval//batch:].sum(dim=0)/(print_interval//batch)).numpy()}\n' +
                           f'Correct: {100*np.array(correct)/print_interval}%')
                acc.append(np.array(correct)/print_interval)
                correct = (len(SNN.layers) + 1)*[0]
            pbar.update(batch)
    return [out_proj_0, *out_projs], np.asarray(acc), torch.stack(losses_out)

with torch.no_grad():
    if args.augment:
        n_epochs = 100
        cat = True
        # if already trained, load the output projections
        if os.path.exists(model_name[:-3]+'_out_projs.pt'):
            out_projs = torch.load(model_name[:-3]+'_out_projs.pt', map_location=device)
        else:
            out_projs, acc, losses_out = train_out_proj(n_epochs, batch_size, cat)
            torch.save(out_projs, model_name[:-3]+'_out_projs.pt')
  0%|          | 0/815600 [00:00<?, ?it/s]
Cross Entropy Loss: [11.547028 10.279467]
Correct: [4.0625  4.21875]%
Cross Entropy Loss: [7.328139  6.5790076]
Correct: [4.6875  5.46875]%
...
Cross Entropy Loss: [1.9022074 0.9006504]
Correct: [37.34375 70.625  ]%
Cross Entropy Loss: [2.0157673 0.9231532]
Correct: [38.125 69.375]%
Correct: [40.3125 70.3125]%
Cross Entropy Loss: [1.8712553 0.8199031]
Correct: [39.6875  72.03125]%
Cross Entropy Loss: [1.9206536 0.8757763]
Correct: [37.65625 69.375  ]%
Cross Entropy Loss: [1.9069599 0.7943515]
Correct: [39.84375 72.8125 ]%
Cross Entropy Loss: [2.0004506 0.9129486]
Correct: [36.40625 68.90625]%
Cross Entropy Loss: [1.9126017 0.8481139]
Correct: [38.28125 71.5625 ]%
Cross Entropy Loss: [1.9422376  0.80118704]
Correct: [37.8125 72.1875]%
Cross Entropy Loss: [2.1069267 0.9961673]
Correct: [33.75    65.46875]%
Cross Entropy Loss: [1.6956062  0.72127473]
Correct: [43.125   76.09375]%
Cross Entropy Loss: [2.0056605 0.8271977]
Correct: [34.6875  71.09375]%
Cross Entropy Loss: [1.7731626 0.7364307]
Correct: [42.8125 73.75  ]%
Cross Entropy Loss: [1.8813254 0.819637 ]
Correct: [39.53125 72.03125]%
Cross Entropy Loss: [1.7974262  0.77863276]
Correct: [42.5     73.59375]%
Cross Entropy Loss: [2.0057938 0.9535781]
Correct: [38.75    67.65625]%
Cross Entropy Loss: [2.1011817 1.0164788]
Correct: [35.625  67.1875]%
Cross Entropy Loss: [1.9315411 0.9937328]
Correct: [37.8125 69.375 ]%
Cross Entropy Loss: [1.7565416  0.90591544]
Correct: [41.71875 69.84375]%
Cross Entropy Loss: [2.104596  0.9485844]
Correct: [33.28125 67.1875 ]%
Cross Entropy Loss: [1.9717503 0.9401512]
Correct: [37.03125 68.125  ]%
Cross Entropy Loss: [1.8597686 0.8337289]
Correct: [39.53125 71.25   ]%
Cross Entropy Loss: [2.0891156 1.0075617]
Correct: [33.90625 66.40625]%
Cross Entropy Loss: [1.8018882  0.86392486]
Correct: [41.875   73.59375]%
Cross Entropy Loss: [1.842123  0.8833376]
Correct: [38.59375 69.0625 ]%
Cross Entropy Loss: [2.105988  1.0121101]
Correct: [34.0625 67.5   ]%
Cross Entropy Loss: [1.8361633 0.7369094]
Correct: [40.3125 75.625 ]%
Cross Entropy Loss: [1.9523077 0.9245311]
Correct: [37.1875  66.40625]%
Cross Entropy Loss: [1.8830221 0.818847 ]
Correct: [39.6875 72.1875]%
Cross Entropy Loss: [1.9296334 0.7605677]
Correct: [35.      75.15625]%
Cross Entropy Loss: [1.8666788 0.8473328]
Correct: [36.5625  73.28125]%
Cross Entropy Loss: [1.8564093  0.82096183]
Correct: [39.0625 72.1875]%
Cross Entropy Loss: [2.0005364 0.9816345]
Correct: [36.875   66.40625]%
Cross Entropy Loss: [1.8640411 0.7988927]
Correct: [39.53125 72.65625]%
Cross Entropy Loss: [2.0341678 0.9065712]
Correct: [36.09375 68.90625]%
Cross Entropy Loss: [1.8532254  0.83622426]
Correct: [42.34375 71.25   ]%
Cross Entropy Loss: [1.9868752  0.85001355]
Correct: [38.125   70.46875]%
Cross Entropy Loss: [2.0091524  0.93257064]
Correct: [37.5  71.25]%
Cross Entropy Loss: [1.9768187 0.8339845]
Correct: [39.375   72.96875]%
Cross Entropy Loss: [1.8741996 0.8691395]
Correct: [41.25    69.21875]%
Cross Entropy Loss: [1.8143088  0.86440974]
Correct: [41.875  69.6875]%
Cross Entropy Loss: [1.9595731 0.8242504]
Correct: [37.96875 73.75   ]%
Cross Entropy Loss: [1.9229256  0.89336395]
Correct: [41.09375 70.3125 ]%
Cross Entropy Loss: [1.9706309 0.8748406]
Correct: [38.4375 70.3125]%
Cross Entropy Loss: [1.8873885  0.81311667]
Correct: [38.59375 71.875  ]%
Cross Entropy Loss: [1.779002  0.8064486]
Correct: [39.6875 71.5625]%
Cross Entropy Loss: [1.8850933  0.85457975]
Correct: [39.53125 70.78125]%
Cross Entropy Loss: [1.972842   0.93280584]
Correct: [37.34375 68.59375]%
Cross Entropy Loss: [1.8523144  0.81908023]
Correct: [38.28125 72.03125]%
Cross Entropy Loss: [2.0654578  0.88964653]
Correct: [38.59375 69.21875]%
Cross Entropy Loss: [1.8929327  0.87478036]
Correct: [39.375 66.875]%
Cross Entropy Loss: [1.9595068  0.87330186]
Correct: [36.40625 70.46875]%
Cross Entropy Loss: [1.8157251  0.80360574]
Correct: [42.03125 75.15625]%
Cross Entropy Loss: [2.0758657 0.9335375]
Correct: [36.875   68.90625]%
Cross Entropy Loss: [1.7975537 0.7471707]
Correct: [39.375   77.03125]%
Cross Entropy Loss: [2.1640186 0.9194107]
Correct: [34.53125 70.     ]%
Cross Entropy Loss: [2.0383983  0.88080823]
Correct: [36.09375 70.46875]%
Cross Entropy Loss: [1.9370111 0.8759388]
Correct: [37.34375 69.53125]%
Cross Entropy Loss: [1.9309971 0.8646889]
Correct: [36.09375 70.     ]%
Cross Entropy Loss: [1.931666  0.8405374]
Correct: [38.59375 71.09375]%
Cross Entropy Loss: [1.8376986 0.7887496]
Correct: [38.4375 71.875 ]%
Cross Entropy Loss: [2.2788987 1.1905185]
Correct: [29.84375 62.1875 ]%
Cross Entropy Loss: [1.8479617 0.8240677]
Correct: [42.34375 72.5    ]%
Cross Entropy Loss: [2.0042377  0.98595554]
Correct: [37.03125 66.71875]%
Cross Entropy Loss: [1.9769138 0.9139818]
Correct: [36.71875 67.5    ]%
Cross Entropy Loss: [1.7747654 0.7606961]
Correct: [45.15625 74.0625 ]%
Cross Entropy Loss: [2.0168662  0.88941824]
Correct: [35.46875 67.8125 ]%
Cross Entropy Loss: [2.070307   0.85203993]
Correct: [35.625   70.46875]%
Cross Entropy Loss: [1.8717296 0.8054962]
Correct: [41.25   72.1875]%
Cross Entropy Loss: [1.8577547 0.7757021]
Correct: [40.     72.8125]%
Cross Entropy Loss: [1.9475365 0.794682 ]
Correct: [36.25    73.90625]%
Cross Entropy Loss: [1.9350469  0.87391603]
Correct: [36.09375 72.1875 ]%
Cross Entropy Loss: [1.7236805 0.7195571]
Correct: [42.03125 75.78125]%
Cross Entropy Loss: [1.922018 0.759751]
Correct: [36.25    77.34375]%
Cross Entropy Loss: [1.9699196 0.819997 ]
Correct: [34.6875 72.8125]%
Cross Entropy Loss: [1.9818363 1.0269458]
Correct: [38.28125 66.875  ]%
Cross Entropy Loss: [2.0057654 0.9270563]
Correct: [35.46875 69.21875]%
Cross Entropy Loss: [2.0105329 0.9344529]
Correct: [37.03125 68.4375 ]%
Cross Entropy Loss: [1.8280458 0.8609055]
Correct: [41.09375 71.5625 ]%
Cross Entropy Loss: [1.8233522  0.78186095]
Correct: [41.875 73.125]%
Cross Entropy Loss: [1.8733145  0.77569723]
Correct: [39.84375 74.0625 ]%
Cross Entropy Loss: [1.9086692  0.89578104]
Correct: [37.03125 67.8125 ]%
Cross Entropy Loss: [1.9343178  0.86399776]
Correct: [37.1875  71.09375]%
Cross Entropy Loss: [2.104353  1.0212682]
Correct: [36.09375 67.1875 ]%
Cross Entropy Loss: [1.854091  0.8356699]
Correct: [37.5  71.25]%
Cross Entropy Loss: [1.9753675 0.744878 ]
Correct: [37.5     72.96875]%
Cross Entropy Loss: [1.8681492  0.94932014]
Correct: [40.      68.90625]%
Cross Entropy Loss: [1.9681084  0.91747314]
Correct: [37.96875 70.46875]%
Cross Entropy Loss: [1.8961118 0.8435539]
Correct: [42.34375 72.96875]%
Cross Entropy Loss: [1.9987698 0.9005434]
Correct: [36.71875 71.5625 ]%
Cross Entropy Loss: [2.022813   0.83170307]
Correct: [37.1875  72.65625]%
Cross Entropy Loss: [1.9700253 0.7737605]
Correct: [37.8125  74.84375]%
Cross Entropy Loss: [1.7519125 0.7772139]
Correct: [43.90625 74.6875 ]%
Cross Entropy Loss: [1.7379764 0.6641899]
Correct: [43.28125 75.625  ]%
Cross Entropy Loss: [1.8900242  0.77724296]
Correct: [40.46875 74.6875 ]%
Cross Entropy Loss: [1.8679287 0.8260145]
Correct: [38.28125 73.4375 ]%
Cross Entropy Loss: [1.8456215 0.8153187]
Correct: [36.5625  71.40625]%
Cross Entropy Loss: [1.9230366 0.9136008]
Correct: [39.21875 70.15625]%
Cross Entropy Loss: [1.8794467 0.8675393]
Correct: [39.53125 70.9375 ]%
Cross Entropy Loss: [1.8625603 0.8100252]
Correct: [41.09375 71.875  ]%
Cross Entropy Loss: [2.0904164  0.93784904]
Correct: [35.3125 68.75  ]%
Cross Entropy Loss: [1.795028  0.7777196]
Correct: [39.6875  74.84375]%
Cross Entropy Loss: [1.801631  0.8425695]
Correct: [39.0625 70.9375]%
Cross Entropy Loss: [1.9484508  0.93190944]
Correct: [37.96875 68.125  ]%
Cross Entropy Loss: [2.1257846 0.9399969]
Correct: [35.9375  67.34375]%
Cross Entropy Loss: [1.8810345 0.8865019]
Correct: [38.75    69.53125]%
Cross Entropy Loss: [1.7929665 0.7977549]
Correct: [43.125 74.375]%
Cross Entropy Loss: [2.0304465 0.9259912]
Correct: [36.40625 70.46875]%
Cross Entropy Loss: [1.7975321 0.7160188]
Correct: [40.15625 76.09375]%
Cross Entropy Loss: [1.7806438 0.7571734]
Correct: [40.9375  73.28125]%
Cross Entropy Loss: [1.8550602  0.83024037]
Correct: [40.9375 74.375 ]%
Cross Entropy Loss: [1.8688806 0.7664345]
Correct: [39.21875 72.34375]%
Cross Entropy Loss: [2.0740001 1.1211672]
Correct: [36.25    64.84375]%
Cross Entropy Loss: [1.9740181 0.9865762]
Correct: [38.75    67.03125]%
Cross Entropy Loss: [1.8036566  0.88284224]
Correct: [40.3125  69.84375]%
Cross Entropy Loss: [1.7186031 0.7822596]
Correct: [44.0625  73.28125]%
Cross Entropy Loss: [2.0840805  0.96791315]
Correct: [35.      68.59375]%
Cross Entropy Loss: [1.9466257 0.8581923]
Correct: [39.21875 70.     ]%
Cross Entropy Loss: [2.1923652 1.0171297]
Correct: [32.96875 66.71875]%
Cross Entropy Loss: [1.7358977 0.8211607]
Correct: [44.84375 70.9375 ]%
Cross Entropy Loss: [1.8393488 0.7764181]
Correct: [40.15625 71.875  ]%
Cross Entropy Loss: [1.8784053 0.8645853]
Correct: [39.53125 71.09375]%
Cross Entropy Loss: [1.924626   0.84048986]
Correct: [39.21875 70.3125 ]%
Cross Entropy Loss: [1.9126778 0.8370615]
Correct: [39.53125 71.71875]%
Cross Entropy Loss: [1.8405005  0.80549777]
Correct: [39.53125 72.8125 ]%
Cross Entropy Loss: [1.9359353 0.8411915]
Correct: [38.125 72.5  ]%
Cross Entropy Loss: [1.9088173 0.8460366]
Correct: [38.59375 70.625  ]%
Cross Entropy Loss: [1.9105752 0.9926637]
Correct: [42.34375 66.5625 ]%
Cross Entropy Loss: [1.933572   0.92993164]
Correct: [36.875 70.625]%
Cross Entropy Loss: [2.0380778 1.0145761]
Correct: [34.6875  66.09375]%
Cross Entropy Loss: [1.9041293 0.9057522]
Correct: [37.8125  69.21875]%
Cross Entropy Loss: [1.9815823 0.9697765]
Correct: [38.90625 69.375  ]%
Cross Entropy Loss: [1.9892416  0.88426065]
Correct: [37.65625 70.3125 ]%
Cross Entropy Loss: [1.8816185  0.80110466]
Correct: [38.125  72.1875]%
Cross Entropy Loss: [2.0717053 0.955723 ]
Correct: [34.53125 68.59375]%
Cross Entropy Loss: [1.7464058 0.7554139]
Correct: [40.9375 74.0625]%
Cross Entropy Loss: [2.1676414 1.0016232]
Correct: [32.5   68.125]%
Cross Entropy Loss: [1.9097683 0.8004856]
Correct: [40.46875 72.65625]%
Cross Entropy Loss: [2.16476  0.879608]
Correct: [34.53125 68.75   ]%
Cross Entropy Loss: [1.9262445 0.8053833]
Correct: [36.40625 72.65625]%
Cross Entropy Loss: [1.8979485  0.76407635]
Correct: [39.375  74.0625]%
Cross Entropy Loss: [1.7536768 0.74233  ]
Correct: [40.78125 75.15625]%
Cross Entropy Loss: [1.9577118 0.7669058]
Correct: [40.15625 73.75   ]%
Cross Entropy Loss: [1.8368769 0.8562708]
Correct: [38.28125 71.5625 ]%
Cross Entropy Loss: [1.783328  0.7759444]
Correct: [40.46875 70.9375 ]%
Cross Entropy Loss: [1.8648913 0.7659849]
Correct: [39.6875  74.21875]%
Cross Entropy Loss: [1.8586801  0.77837664]
Correct: [38.90625 74.21875]%
Cross Entropy Loss: [1.7963082 0.8670179]
Correct: [41.09375 69.6875 ]%
Cross Entropy Loss: [1.620408  0.6663086]
Correct: [45.625   75.78125]%
Cross Entropy Loss: [2.0322433 0.8823472]
Correct: [34.84375 70.9375 ]%
Cross Entropy Loss: [1.9028265  0.81525767]
Correct: [39.84375 72.5    ]%
Cross Entropy Loss: [1.8053081 0.7608803]
Correct: [40.625   73.28125]%
Cross Entropy Loss: [1.8616015 0.7837942]
Correct: [40.15625 73.4375 ]%
Cross Entropy Loss: [1.9424927  0.82732296]
Correct: [37.5     71.71875]%
Cross Entropy Loss: [1.8460591  0.76542264]
Correct: [39.21875 72.65625]%
Cross Entropy Loss: [1.7708626 0.7570559]
Correct: [43.125 73.75 ]%
Cross Entropy Loss: [1.8902152 0.731264 ]
Correct: [37.96875 75.15625]%
Cross Entropy Loss: [2.0207415  0.86403716]
Correct: [36.09375 70.625  ]%
Cross Entropy Loss: [1.917359   0.88603914]
Correct: [37.03125 69.375  ]%
Cross Entropy Loss: [1.8501469 0.7857279]
Correct: [38.59375 73.59375]%
Cross Entropy Loss: [2.0371935 0.9316718]
Correct: [36.09375 70.625  ]%
Cross Entropy Loss: [1.8210008 0.7817372]
Correct: [40.3125  72.34375]%
Cross Entropy Loss: [1.9289589  0.95939666]
Correct: [39.53125 68.90625]%
Cross Entropy Loss: [1.9263216 0.9614647]
Correct: [37.65625 68.59375]%
Cross Entropy Loss: [1.8370911  0.79328144]
Correct: [40.      73.90625]%
Cross Entropy Loss: [1.9507647 0.8526163]
Correct: [36.71875 72.1875 ]%
Cross Entropy Loss: [1.9363921 0.9032999]
Correct: [37.34375 71.71875]%
Cross Entropy Loss: [1.9533507 0.9148165]
Correct: [37.96875 70.     ]%
Cross Entropy Loss: [1.9914684 0.911847 ]
Correct: [36.875   67.34375]%
Cross Entropy Loss: [1.614749   0.63450116]
Correct: [46.40625 76.5625 ]%
Cross Entropy Loss: [1.9312961  0.92783374]
Correct: [42.1875 68.4375]%
Cross Entropy Loss: [1.9931784 0.8473482]
Correct: [36.71875 74.6875 ]%
Cross Entropy Loss: [1.7385061 0.7419902]
Correct: [40.625   74.53125]%
Cross Entropy Loss: [1.7825711  0.69047385]
Correct: [41.09375 75.46875]%
Cross Entropy Loss: [1.8476931  0.77098066]
Correct: [38.4375  72.03125]%
Cross Entropy Loss: [1.9427814 0.8906975]
Correct: [37.65625 70.46875]%
Cross Entropy Loss: [1.7537069 0.715185 ]
Correct: [41.09375 76.25   ]%
Cross Entropy Loss: [2.0039148  0.87706554]
Correct: [36.875 70.625]%
Cross Entropy Loss: [1.9084885 0.8079421]
Correct: [39.6875  72.03125]%
Cross Entropy Loss: [1.9524136 0.9049112]
Correct: [38.125   69.21875]%
Cross Entropy Loss: [1.9638226 0.9228509]
Correct: [35.3125  70.15625]%
Cross Entropy Loss: [1.8364604 0.887214 ]
Correct: [40.9375 72.5   ]%
Cross Entropy Loss: [1.9844329 0.8993899]
Correct: [34.21875 71.09375]%
Cross Entropy Loss: [1.8521307  0.84208095]
Correct: [40.3125  71.09375]%
Cross Entropy Loss: [1.8735259 0.806561 ]
Correct: [41.25   74.0625]%
Cross Entropy Loss: [2.041991   0.88094074]
Correct: [33.59375 73.28125]%
Cross Entropy Loss: [1.8568529 0.8176139]
Correct: [40.3125  72.34375]%
Cross Entropy Loss: [2.0302157 0.8969573]
Correct: [36.5625 68.4375]%
Cross Entropy Loss: [1.7716072  0.74991757]
Correct: [39.6875  72.65625]%
Cross Entropy Loss: [1.8532816 0.7524964]
Correct: [39.21875 75.     ]%
Cross Entropy Loss: [1.8422518  0.80005664]
Correct: [39.375   73.90625]%
Cross Entropy Loss: [1.9510149  0.86358345]
Correct: [35.625 72.5  ]%
Cross Entropy Loss: [2.0163574 0.8258394]
Correct: [37.1875  73.28125]%
Cross Entropy Loss: [2.0908425 0.8985162]
Correct: [34.21875 71.09375]%
Cross Entropy Loss: [1.7037485 0.8673447]
Correct: [41.09375 71.25   ]%
Cross Entropy Loss: [1.9577576 1.0369743]
Correct: [35.78125 67.34375]%
Cross Entropy Loss: [1.8475662  0.78130686]
Correct: [38.59375 75.15625]%
Cross Entropy Loss: [1.753485   0.73732376]
Correct: [41.875   76.09375]%
Cross Entropy Loss: [1.8578752  0.79166925]
Correct: [39.375   73.59375]%
Cross Entropy Loss: [1.6485764  0.63924503]
Correct: [44.375   77.65625]%
Cross Entropy Loss: [1.8745857 0.8055066]
Correct: [39.21875 72.03125]%
Cross Entropy Loss: [1.9265884  0.77652436]
Correct: [37.1875  74.21875]%
Cross Entropy Loss: [1.9092814 0.8262763]
Correct: [37.03125 72.8125 ]%
Cross Entropy Loss: [2.0068774 0.8397468]
Correct: [38.125  70.3125]%
Cross Entropy Loss: [1.9017241  0.72965163]
Correct: [37.96875 73.59375]%
Cross Entropy Loss: [1.7207447 0.7242519]
Correct: [42.34375 75.625  ]%
Cross Entropy Loss: [1.9204438  0.75303376]
Correct: [37.96875 75.15625]%
Cross Entropy Loss: [1.9481173 0.8830689]
Correct: [37.8125 71.25  ]%
Cross Entropy Loss: [1.9528767 0.859581 ]
Correct: [37.96875 70.46875]%
Cross Entropy Loss: [1.8462613 0.7778927]
Correct: [41.875 72.5  ]%
Cross Entropy Loss: [2.0351148 0.8896805]
Correct: [37.1875 67.8125]%
Cross Entropy Loss: [1.7875017  0.72613716]
Correct: [41.71875 77.03125]%
Cross Entropy Loss: [2.060522   0.89852035]
Correct: [35.9375 71.25  ]%
Cross Entropy Loss: [2.1853232 0.9578756]
Correct: [32.03125 66.71875]%
Cross Entropy Loss: [1.925757  0.8648012]
Correct: [36.71875 69.375  ]%
Cross Entropy Loss: [1.992087  0.7929315]
Correct: [35.3125 73.4375]%
Cross Entropy Loss: [2.01481    0.94499046]
Correct: [34.375 70.   ]%
Cross Entropy Loss: [1.964996  0.8034984]
Correct: [36.09375 71.40625]%
Cross Entropy Loss: [1.7472908 0.7729301]
Correct: [41.40625 71.5625 ]%
Cross Entropy Loss: [1.8158588 0.7522197]
Correct: [40.9375 73.4375]%
Cross Entropy Loss: [1.8047183  0.76666003]
Correct: [40.625 74.375]%
Cross Entropy Loss: [1.8027279 0.7281383]
Correct: [42.03125 75.625  ]%
Cross Entropy Loss: [1.9228687 0.8286122]
Correct: [38.90625 72.34375]%
Cross Entropy Loss: [1.9521363 0.7952709]
Correct: [37.65625 73.125  ]%
Cross Entropy Loss: [1.8357687 0.881378 ]
Correct: [38.125 70.   ]%
Cross Entropy Loss: [1.6936095 0.7067156]
Correct: [42.65625 76.5625 ]%
Cross Entropy Loss: [1.7225593 0.7321889]
Correct: [42.65625 74.21875]%
Cross Entropy Loss: [1.8704498 0.805075 ]
Correct: [41.5625 70.9375]%
Cross Entropy Loss: [1.8790281  0.84706706]
Correct: [39.84375 71.875  ]%
Cross Entropy Loss: [1.9385659 0.8893654]
Correct: [42.5    69.6875]%
Cross Entropy Loss: [1.6932901 0.7578677]
Correct: [43.59375 74.375  ]%
Cross Entropy Loss: [1.9397055  0.79557765]
Correct: [38.4375 71.5625]%
Cross Entropy Loss: [1.9742695  0.92260516]
Correct: [36.09375 69.375  ]%
Cross Entropy Loss: [1.9360796 0.8839205]
Correct: [38.75    72.03125]%
Cross Entropy Loss: [1.8806629  0.78126305]
Correct: [39.6875 72.8125]%
Cross Entropy Loss: [1.8520586 0.7962285]
Correct: [38.59375 71.875  ]%
Cross Entropy Loss: [1.766799   0.74423754]
Correct: [39.53125 73.125  ]%
Cross Entropy Loss: [1.9988178 0.9007322]
Correct: [34.0625  71.71875]%
Cross Entropy Loss: [1.7290862 0.7139652]
Correct: [41.40625 76.875  ]%
Cross Entropy Loss: [1.9466734  0.86640644]
Correct: [38.59375 70.625  ]%
Cross Entropy Loss: [1.7270145 0.7308275]
Correct: [44.53125 74.53125]%
Cross Entropy Loss: [1.9447199 0.8012664]
Correct: [35.625  72.1875]%
Cross Entropy Loss: [1.8143451  0.65898883]
Correct: [43.75   79.0625]%
Cross Entropy Loss: [1.8238277 0.6614629]
Correct: [40.      76.71875]%
Cross Entropy Loss: [2.1447015  0.82475156]
Correct: [33.59375 73.125  ]%
Cross Entropy Loss: [1.748835  0.7227561]
Correct: [41.25    75.46875]%
Cross Entropy Loss: [1.640433  0.7423871]
Correct: [43.28125 72.8125 ]%
Cross Entropy Loss: [1.9428673  0.83998144]
Correct: [39.84375 71.5625 ]%
Cross Entropy Loss: [1.9022251 0.8594006]
Correct: [38.90625 70.     ]%
Cross Entropy Loss: [1.8290266 0.7587792]
Correct: [41.09375 75.15625]%
Cross Entropy Loss: [1.961203   0.83604777]
Correct: [35.78125 71.875  ]%
Cross Entropy Loss: [1.8744158  0.69790745]
Correct: [40.3125 74.6875]%
Cross Entropy Loss: [1.9239438 0.7832534]
Correct: [40.9375 73.4375]%
Cross Entropy Loss: [1.8803711  0.82782394]
Correct: [38.4375  71.40625]%
Cross Entropy Loss: [1.7187535  0.72216594]
Correct: [41.40625 74.53125]%
Cross Entropy Loss: [1.8876143  0.88217294]
Correct: [37.03125 70.625  ]%
Cross Entropy Loss: [2.0322223 0.9439659]
Correct: [36.40625 69.6875 ]%
Cross Entropy Loss: [1.798638  0.7505001]
Correct: [40.15625 75.15625]%
Cross Entropy Loss: [1.9870716 0.8772524]
Correct: [36.40625 69.21875]%
Cross Entropy Loss: [1.917894   0.85916185]
Correct: [38.28125 70.46875]%
Cross Entropy Loss: [1.8892101 0.8692805]
Correct: [37.1875  70.46875]%
Cross Entropy Loss: [1.9010273 0.8232665]
Correct: [40.3125  73.28125]%
Cross Entropy Loss: [2.0802288 1.0289383]
Correct: [35.9375  65.15625]%
Cross Entropy Loss: [1.8329494 0.7942332]
Correct: [41.5625  74.21875]%
Cross Entropy Loss: [1.7941059  0.78738225]
Correct: [41.875   73.28125]%
Cross Entropy Loss: [1.8591225  0.84015816]
Correct: [37.65625 69.21875]%
Cross Entropy Loss: [1.8191704 0.798617 ]
Correct: [39.21875 74.53125]%
Cross Entropy Loss: [1.8748503 0.8166522]
Correct: [36.09375 71.5625 ]%
Cross Entropy Loss: [1.833547  0.7868756]
Correct: [41.09375 73.90625]%
Cross Entropy Loss: [1.9132407 0.7572608]
Correct: [36.5625  74.21875]%
Cross Entropy Loss: [1.7970073  0.72719634]
Correct: [40.625  75.3125]%
Cross Entropy Loss: [2.0542548  0.88947326]
Correct: [35.9375  70.78125]%
Cross Entropy Loss: [1.7882534  0.79527473]
Correct: [39.6875 74.0625]%
Cross Entropy Loss: [1.8857523  0.78197956]
Correct: [37.03125 72.34375]%
Cross Entropy Loss: [1.7112141  0.76920956]
Correct: [44.53125 75.15625]%
Cross Entropy Loss: [1.9425137 0.8701912]
Correct: [35.78125 70.3125 ]%
Cross Entropy Loss: [2.0434132 0.8933698]
Correct: [35.9375 71.875 ]%
Cross Entropy Loss: [1.964941   0.87696093]
Correct: [38.28125 69.375  ]%
Cross Entropy Loss: [1.82149   0.8706158]
Correct: [41.875 69.375]%
Cross Entropy Loss: [2.0813553  0.88953656]
Correct: [35.      70.78125]%
Cross Entropy Loss: [1.7802708 0.9378398]
Correct: [41.09375 68.28125]%
Cross Entropy Loss: [1.8172184  0.80655986]
Correct: [39.375   72.65625]%
Cross Entropy Loss: [1.7386605 0.6764223]
Correct: [41.5625  76.40625]%
Cross Entropy Loss: [1.727205   0.73136735]
Correct: [45.625 75.625]%
Cross Entropy Loss: [1.7663656 0.7655145]
Correct: [42.65625 72.96875]%
Cross Entropy Loss: [2.0209367 0.8089984]
Correct: [33.59375 73.90625]%
Cross Entropy Loss: [1.8921497  0.79230523]
Correct: [39.375 72.5  ]%
Cross Entropy Loss: [1.7291794  0.78376675]
Correct: [44.84375 75.15625]%
Cross Entropy Loss: [1.8169559  0.80799186]
Correct: [41.09375 73.4375 ]%
Cross Entropy Loss: [1.8378828  0.71711683]
Correct: [39.21875 75.46875]%
Cross Entropy Loss: [2.0093637  0.90378016]
Correct: [37.5    69.6875]%
Cross Entropy Loss: [1.8116295 0.7256064]
Correct: [38.90625 75.625  ]%
Cross Entropy Loss: [1.879606   0.76836556]
Correct: [40.46875 72.34375]%
Cross Entropy Loss: [1.8728466  0.74831885]
Correct: [38.28125 73.4375 ]%
Cross Entropy Loss: [2.0651727  0.83994997]
Correct: [37.34375 72.65625]%
Cross Entropy Loss: [1.7519779  0.73355186]
Correct: [41.25    74.21875]%
Cross Entropy Loss: [1.880398  0.8016366]
Correct: [38.90625 74.0625 ]%
Cross Entropy Loss: [1.9904398  0.84796923]
Correct: [36.5625  71.09375]%
Cross Entropy Loss: [1.9601996 0.9325431]
Correct: [38.75  69.375]%
Cross Entropy Loss: [1.9206187  0.82546365]
Correct: [37.8125  70.46875]%
Cross Entropy Loss: [1.766711   0.71583015]
Correct: [39.53125 75.46875]%
Cross Entropy Loss: [1.9270916 0.7647687]
Correct: [37.03125 74.6875 ]%
Cross Entropy Loss: [1.8428707 0.7320514]
Correct: [43.28125 75.625  ]%
Cross Entropy Loss: [1.9674294  0.92996323]
Correct: [38.75   70.9375]%
Cross Entropy Loss: [1.7327772 0.6995934]
Correct: [42.03125 76.71875]%
Cross Entropy Loss: [1.9335334  0.83470154]
Correct: [38.125  70.9375]%
Cross Entropy Loss: [1.970116  0.8747777]
Correct: [40.9375 68.75  ]%
Cross Entropy Loss: [1.8333813  0.74357194]
Correct: [41.5625  75.78125]%
Cross Entropy Loss: [1.89984    0.87510043]
Correct: [40.78125 71.5625 ]%
Cross Entropy Loss: [1.5563903  0.63250226]
Correct: [46.5625 79.375 ]%
Cross Entropy Loss: [1.9519466 0.7468983]
Correct: [38.75    75.15625]%
Cross Entropy Loss: [2.0394206 0.9192907]
Correct: [37.1875 66.875 ]%
Cross Entropy Loss: [1.7850641  0.80538416]
Correct: [42.34375 72.65625]%
Cross Entropy Loss: [1.8425964 0.7778138]
Correct: [40.15625 74.21875]%
Cross Entropy Loss: [1.6837444 0.6270924]
Correct: [41.25 78.75]%
Cross Entropy Loss: [1.9132162  0.84209406]
Correct: [36.25   71.5625]%
Cross Entropy Loss: [1.8778851 0.7283275]
Correct: [39.21875 74.6875 ]%
Cross Entropy Loss: [1.968106   0.87756956]
Correct: [36.875   70.78125]%
Cross Entropy Loss: [1.8252537  0.69467133]
Correct: [40.3125 76.875 ]%
Cross Entropy Loss: [2.0060592 0.8010696]
Correct: [37.65625 74.375  ]%
Cross Entropy Loss: [1.972263   0.94063556]
Correct: [37.1875  68.90625]%
Cross Entropy Loss: [1.9500564 0.8680089]
Correct: [36.875   71.40625]%
Cross Entropy Loss: [1.6018801  0.72909397]
Correct: [45.78125 76.25   ]%
Cross Entropy Loss: [1.8163131  0.76785904]
Correct: [41.25 75.  ]%
Cross Entropy Loss: [1.7589004 0.6759887]
Correct: [40.625  75.3125]%
Cross Entropy Loss: [1.7838252 0.7240472]
Correct: [42.8125  73.28125]%
Cross Entropy Loss: [1.9119736 0.9116828]
Correct: [39.53125 71.09375]%
Cross Entropy Loss: [1.9941685 0.8869232]
Correct: [35.15625 72.8125 ]%
Cross Entropy Loss: [1.723987   0.80284435]
Correct: [42.1875 74.6875]%
Cross Entropy Loss: [1.9817253  0.82559747]
Correct: [34.53125 73.59375]%
Cross Entropy Loss: [1.8135223 0.7920741]
Correct: [41.71875 72.8125 ]%
Cross Entropy Loss: [1.8137     0.77574134]
Correct: [40.   73.75]%
Cross Entropy Loss: [1.7480352 0.6294993]
Correct: [40.625   77.03125]%
Cross Entropy Loss: [1.9238939 0.7650114]
Correct: [39.375   72.65625]%
Cross Entropy Loss: [1.9319823 0.8385785]
Correct: [39.6875 72.1875]%
Cross Entropy Loss: [1.9876829  0.78423756]
Correct: [35.78125 73.125  ]%
Cross Entropy Loss: [1.8769922 0.7209439]
Correct: [38.59375 75.     ]%
Cross Entropy Loss: [1.8755362 0.8538121]
Correct: [38.125  72.1875]%
Cross Entropy Loss: [1.6463829  0.63770175]
Correct: [43.4375 78.125 ]%
Cross Entropy Loss: [1.8290745 0.8436715]
Correct: [42.5    73.4375]%
Cross Entropy Loss: [1.6866802  0.64502287]
Correct: [45.15625 77.1875 ]%
Cross Entropy Loss: [1.776133   0.70850533]
Correct: [41.875   75.15625]%
Cross Entropy Loss: [1.8563305 0.882353 ]
Correct: [42.03125 72.1875 ]%
Cross Entropy Loss: [1.851002  0.7285698]
Correct: [40.3125  74.53125]%
Cross Entropy Loss: [1.9158354  0.70658416]
Correct: [40.15625 77.03125]%
Cross Entropy Loss: [1.9164116  0.78191495]
Correct: [38.90625 74.375  ]%
Cross Entropy Loss: [1.7385813  0.73882335]
Correct: [43.59375 75.     ]%
Cross Entropy Loss: [1.7561588 0.6659684]
Correct: [43.75    78.59375]%
Cross Entropy Loss: [1.6397702 0.5984272]
Correct: [44.53125 80.46875]%
Cross Entropy Loss: [1.9269409 0.7890223]
Correct: [39.375  74.6875]%
Cross Entropy Loss: [1.8479939 0.7782055]
Correct: [42.1875 73.75  ]%
Cross Entropy Loss: [1.7991426 0.7226646]
Correct: [40.78125 74.21875]%
Cross Entropy Loss: [1.6745462 0.6489044]
Correct: [42.65625 76.875  ]%
Cross Entropy Loss: [1.8314679  0.79177654]
Correct: [42.5    72.1875]%
Cross Entropy Loss: [1.74814    0.73648876]
Correct: [42.03125 74.53125]%
Cross Entropy Loss: [1.799354   0.81676245]
Correct: [38.90625 72.5    ]%
Cross Entropy Loss: [1.8849579  0.84799707]
Correct: [40.3125  70.15625]%
Cross Entropy Loss: [1.8240869 0.7802156]
Correct: [38.75   74.6875]%
Cross Entropy Loss: [1.8779066 0.7545811]
Correct: [40.9375 74.0625]%
Cross Entropy Loss: [1.962424   0.85455525]
Correct: [39.0625 72.1875]%
Cross Entropy Loss: [1.8820851 0.7173449]
Correct: [39.21875 76.875  ]%
Cross Entropy Loss: [1.7687323 0.7515329]
Correct: [42.1875 74.6875]%
Cross Entropy Loss: [1.7753948  0.65659124]
Correct: [42.03125 77.96875]%
Cross Entropy Loss: [1.9645157 0.8340217]
Correct: [35.3125 70.    ]%
Cross Entropy Loss: [1.8400196 0.7726528]
Correct: [40.9375  73.59375]%
Cross Entropy Loss: [1.83717   0.7054395]
Correct: [37.96875 74.21875]%
Cross Entropy Loss: [1.8303258  0.78517157]
Correct: [40.625   72.34375]%
Cross Entropy Loss: [1.8351654 0.7369355]
Correct: [39.6875 75.3125]%
Cross Entropy Loss: [1.926268   0.81205654]
Correct: [37.96875 73.28125]%
Cross Entropy Loss: [1.8497612 0.7508477]
Correct: [39.84375 74.53125]%
Cross Entropy Loss: [1.7393452  0.78726405]
Correct: [43.90625 74.21875]%
Cross Entropy Loss: [1.8662484 0.771791 ]
Correct: [38.90625 74.375  ]%
Cross Entropy Loss: [1.6769924  0.70335543]
Correct: [43.28125 75.46875]%
Cross Entropy Loss: [1.9903879 0.8262089]
Correct: [35.78125 70.625  ]%
Cross Entropy Loss: [1.8052536  0.79334486]
Correct: [42.03125 71.5625 ]%
Cross Entropy Loss: [1.7869842 0.7548586]
Correct: [40.78125 74.21875]%
Cross Entropy Loss: [1.8763475 0.8085154]
Correct: [39.53125 73.4375 ]%
Cross Entropy Loss: [1.8972038  0.79630816]
Correct: [39.0625  70.15625]%
Cross Entropy Loss: [1.6320307  0.62890494]
Correct: [44.53125 79.21875]%
Cross Entropy Loss: [1.676955   0.68557423]
Correct: [43.4375 77.8125]%
Cross Entropy Loss: [1.859354  0.7636884]
Correct: [37.1875 73.4375]%
Cross Entropy Loss: [2.0318053  0.76494217]
Correct: [36.875  74.0625]%
Cross Entropy Loss: [1.983536   0.86855394]
Correct: [36.5625 70.625 ]%
Cross Entropy Loss: [1.884599   0.81524885]
Correct: [40.15625 72.8125 ]%
Cross Entropy Loss: [1.7432916 0.7475638]
Correct: [42.5    76.5625]%
Cross Entropy Loss: [1.6774868 0.715091 ]
Correct: [41.71875 75.46875]%
Cross Entropy Loss: [1.8700855 0.7257382]
Correct: [41.71875 77.5    ]%
Cross Entropy Loss: [1.8344872 0.8430092]
Correct: [41.71875 70.78125]%
Cross Entropy Loss: [1.8204437 0.6994184]
Correct: [43.75    76.71875]%
Cross Entropy Loss: [1.6771634 0.7012872]
Correct: [43.28125 76.5625 ]%
Cross Entropy Loss: [1.9276854  0.80790555]
Correct: [41.5625 71.875 ]%
Cross Entropy Loss: [1.8222663 0.7315546]
Correct: [38.90625 74.53125]%
Cross Entropy Loss: [2.0073972 0.8597145]
Correct: [35.15625 71.09375]%
Cross Entropy Loss: [1.8549019  0.83070564]
Correct: [40.46875 73.4375 ]%
Cross Entropy Loss: [1.7669204 0.7275291]
Correct: [41.5625  73.59375]%
Cross Entropy Loss: [1.8670609 0.6967986]
Correct: [39.84375 75.9375 ]%
Cross Entropy Loss: [1.7976004  0.85154516]
Correct: [40.    70.625]%
Cross Entropy Loss: [1.8466778 0.8045387]
Correct: [40.46875 70.46875]%
Cross Entropy Loss: [1.9459835  0.88622963]
Correct: [37.96875 70.625  ]%
Cross Entropy Loss: [1.8181686  0.75165784]
Correct: [39.375   75.78125]%
Cross Entropy Loss: [1.887944   0.82552147]
Correct: [37.8125 70.625 ]%
Cross Entropy Loss: [1.8659751 0.820137 ]
Correct: [39.84375 71.25   ]%
Cross Entropy Loss: [1.8616245 0.7185215]
Correct: [38.59375 77.65625]%
Cross Entropy Loss: [1.7099396 0.7082815]
Correct: [42.34375 76.71875]%
Cross Entropy Loss: [1.9223092 0.7922657]
Correct: [38.59375 72.65625]%
Cross Entropy Loss: [1.9240944 0.7930136]
Correct: [37.96875 72.1875 ]%
Cross Entropy Loss: [1.8497072 0.7798716]
Correct: [40.3125 74.0625]%
Cross Entropy Loss: [1.8514309 0.8389745]
Correct: [37.65625 73.75   ]%
Cross Entropy Loss: [1.9340452 0.7787571]
Correct: [39.6875  74.53125]%
Cross Entropy Loss: [1.9593445 0.7743634]
Correct: [36.25 72.5 ]%
Cross Entropy Loss: [1.910127   0.76556516]
Correct: [35.9375  74.84375]%
Cross Entropy Loss: [1.726059  0.7047796]
Correct: [42.1875 75.9375]%
Cross Entropy Loss: [1.7517935 0.6921958]
Correct: [41.875   75.46875]%
Cross Entropy Loss: [2.0438523 0.8684306]
Correct: [36.40625 72.1875 ]%
Cross Entropy Loss: [1.9058447 0.8014797]
Correct: [39.0625  70.46875]%
Cross Entropy Loss: [1.69417    0.72472733]
Correct: [44.21875 74.6875 ]%
Cross Entropy Loss: [1.877044   0.82554054]
Correct: [37.1875 70.3125]%
Cross Entropy Loss: [1.8368466  0.77672184]
Correct: [43.75 75.  ]%
Cross Entropy Loss: [1.7872095 0.7145773]
Correct: [40.3125  73.90625]%
Cross Entropy Loss: [1.9422791 0.9345012]
Correct: [39.84375 69.375  ]%
Cross Entropy Loss: [1.9476992  0.86236876]
Correct: [35.3125  71.40625]%
Cross Entropy Loss: [1.897625   0.82429427]
Correct: [38.28125 72.1875 ]%
Cross Entropy Loss: [1.7784474 0.7807086]
Correct: [41.09375 73.125  ]%
Cross Entropy Loss: [1.9660523 0.897384 ]
Correct: [38.125   70.15625]%
Cross Entropy Loss: [1.9902828 0.7745082]
Correct: [35.46875 73.28125]%
Cross Entropy Loss: [1.7378409  0.67632526]
Correct: [42.8125 77.5   ]%
Cross Entropy Loss: [1.8340861 0.6867131]
Correct: [39.375  77.1875]%
Cross Entropy Loss: [1.7572321 0.6621549]
Correct: [40.9375  77.96875]%
Cross Entropy Loss: [1.9375448 0.8636953]
Correct: [36.71875 71.5625 ]%
Cross Entropy Loss: [2.0006986 0.8485076]
Correct: [33.90625 71.5625 ]%
Cross Entropy Loss: [1.8892233  0.68874675]
Correct: [36.875 78.75 ]%
Cross Entropy Loss: [1.9129661 0.8567591]
Correct: [38.59375 70.625  ]%
Cross Entropy Loss: [1.8728454 0.804616 ]
Correct: [38.4375  73.28125]%
Cross Entropy Loss: [1.9026121 0.7536136]
Correct: [34.53125 74.375  ]%
Cross Entropy Loss: [1.9978958 0.8261097]
Correct: [34.53125 71.40625]%
Cross Entropy Loss: [1.8006767 0.7468387]
Correct: [41.40625 75.78125]%
Cross Entropy Loss: [1.767627  0.6667184]
Correct: [40.46875 75.3125 ]%
Cross Entropy Loss: [1.8433676  0.76450574]
Correct: [38.125   74.53125]%
Cross Entropy Loss: [1.7923958 0.7580448]
Correct: [42.1875  75.15625]%
Cross Entropy Loss: [1.8429663 0.7646257]
Correct: [41.25  75.625]%
Cross Entropy Loss: [1.9887483 0.8645679]
Correct: [38.4375 69.6875]%
Cross Entropy Loss: [1.7126204 0.6909278]
Correct: [41.40625 77.03125]%
Cross Entropy Loss: [1.7132254 0.6850715]
Correct: [43.4375 75.3125]%
Cross Entropy Loss: [1.8654028  0.72895014]
Correct: [38.125 75.625]%
Cross Entropy Loss: [1.7028767  0.63848114]
Correct: [41.25  78.125]%
Cross Entropy Loss: [1.7475588 0.7179818]
Correct: [41.71875 74.53125]%
Cross Entropy Loss: [1.6078475  0.58653915]
Correct: [44.6875  79.21875]%
Cross Entropy Loss: [1.7523896 0.6501301]
Correct: [42.65625 76.71875]%
Cross Entropy Loss: [1.7215122  0.63821113]
Correct: [43.59375 78.125  ]%
Cross Entropy Loss: [1.9593894  0.80048734]
Correct: [36.25 73.75]%
Cross Entropy Loss: [1.7571598  0.79052603]
Correct: [39.375  72.1875]%
Cross Entropy Loss: [2.0199008 0.8190702]
Correct: [37.1875 71.5625]%
Cross Entropy Loss: [1.8702644 0.849905 ]
Correct: [38.125   70.46875]%
Cross Entropy Loss: [2.069848  0.8495375]
Correct: [35.46875 73.90625]%
Cross Entropy Loss: [1.8062305 0.8535535]
Correct: [45.      71.09375]%
Cross Entropy Loss: [1.8538357 0.8133515]
Correct: [38.59375 72.5    ]%
Cross Entropy Loss: [1.8985078 0.796893 ]
Correct: [37.8125  73.28125]%
Cross Entropy Loss: [1.6342977 0.5714007]
Correct: [45.78125 79.375  ]%
Cross Entropy Loss: [2.1256645 0.7838313]
Correct: [36.25    72.96875]%
Cross Entropy Loss: [1.8807955  0.71382284]
Correct: [41.09375 75.9375 ]%
Cross Entropy Loss: [1.6095431 0.6306909]
Correct: [43.75    77.65625]%
Cross Entropy Loss: [1.7420652  0.72109073]
Correct: [41.875  77.1875]%
Cross Entropy Loss: [1.8083982 0.7594255]
Correct: [40.625 73.75 ]%
Cross Entropy Loss: [1.9144669 0.8965824]
Correct: [39.0625 71.5625]%
Cross Entropy Loss: [1.7570843 0.7419653]
Correct: [41.5625 75.3125]%
Cross Entropy Loss: [1.7886908 0.7039047]
Correct: [38.59375 77.03125]%
Cross Entropy Loss: [2.0045753 0.8034806]
Correct: [35.15625 74.0625 ]%
Cross Entropy Loss: [1.9039835 0.7724059]
Correct: [40.    73.125]%
Cross Entropy Loss: [1.7869015 0.6454764]
Correct: [40.9375  77.96875]%
Cross Entropy Loss: [1.8738716 0.7832778]
Correct: [38.4375  73.59375]%
Cross Entropy Loss: [1.9437115 0.7340392]
Correct: [37.34375 75.625  ]%
Cross Entropy Loss: [1.8512815 0.696036 ]
Correct: [41.25 76.25]%
Cross Entropy Loss: [1.7725086 0.6999878]
Correct: [40.78125 76.5625 ]%
Cross Entropy Loss: [1.8070446  0.72082436]
Correct: [39.0625 75.    ]%
Cross Entropy Loss: [1.8857353  0.76497376]
Correct: [38.59375 73.28125]%
Cross Entropy Loss: [1.7486537 0.7181071]
Correct: [42.1875  77.03125]%
Cross Entropy Loss: [1.8787323 0.7411057]
Correct: [41.25    74.21875]%
Cross Entropy Loss: [1.8800808 0.8040821]
Correct: [37.65625 73.28125]%
Cross Entropy Loss: [1.9972029 0.8102072]
Correct: [36.40625 73.59375]%
Cross Entropy Loss: [1.643413  0.6708869]
Correct: [43.59375 76.875  ]%
Cross Entropy Loss: [1.7075832 0.7102415]
Correct: [42.1875 77.8125]%
Cross Entropy Loss: [1.8470719 0.7457353]
Correct: [37.8125 74.375 ]%
Cross Entropy Loss: [1.885349   0.79499114]
Correct: [42.1875  72.96875]%
Cross Entropy Loss: [1.8413236 0.7100686]
Correct: [41.875 75.625]%
Cross Entropy Loss: [1.7878205  0.78331393]
Correct: [41.25   72.8125]%
Cross Entropy Loss: [1.6852751  0.68613523]
Correct: [44.53125 74.53125]%
Cross Entropy Loss: [1.8557537 0.7422128]
Correct: [40.46875 73.75   ]%
Cross Entropy Loss: [1.5932391 0.6061982]
Correct: [47.03125 79.0625 ]%
Cross Entropy Loss: [1.6664194  0.62804294]
Correct: [46.40625 79.21875]%
Cross Entropy Loss: [1.7097756 0.7853028]
Correct: [45.15625 75.     ]%
Cross Entropy Loss: [1.8854202  0.79580796]
Correct: [39.53125 74.0625 ]%
Cross Entropy Loss: [1.7346687 0.6352595]
Correct: [43.4375 79.6875]%
Cross Entropy Loss: [1.9090006  0.78235745]
Correct: [40.15625 74.21875]%
Cross Entropy Loss: [1.9896126 0.8867022]
Correct: [38.28125 71.40625]%
Cross Entropy Loss: [1.7639929 0.7653252]
Correct: [41.09375 74.21875]%
Cross Entropy Loss: [1.7338517 0.6645259]
Correct: [41.40625 78.125  ]%
Cross Entropy Loss: [1.7106667 0.6314148]
Correct: [44.21875 79.53125]%
Cross Entropy Loss: [1.6966025 0.6439932]
Correct: [41.25    77.96875]%
Cross Entropy Loss: [1.9312464  0.80582553]
Correct: [38.125   74.21875]%
Cross Entropy Loss: [1.7275312  0.66003776]
Correct: [43.90625 77.5    ]%
Cross Entropy Loss: [1.8517812  0.76008826]
Correct: [41.5625 75.    ]%
Cross Entropy Loss: [1.9821228  0.83792335]
Correct: [37.8125  71.09375]%
Cross Entropy Loss: [2.137043 0.880414]
Correct: [32.1875  71.09375]%
Cross Entropy Loss: [1.8584884 0.7552613]
Correct: [38.59375 75.625  ]%
Cross Entropy Loss: [1.8776621 0.606078 ]
Correct: [37.96875 80.15625]%
Cross Entropy Loss: [1.7979202  0.73099166]
Correct: [43.28125 76.09375]%
Cross Entropy Loss: [1.8357958 0.7320696]
Correct: [42.65625 75.     ]%
Cross Entropy Loss: [1.9481646  0.81755304]
Correct: [37.8125  71.40625]%
Cross Entropy Loss: [1.7172683  0.80567455]
Correct: [44.53125 73.90625]%
Cross Entropy Loss: [1.6834583 0.7347849]
Correct: [48.28125 77.34375]%
Cross Entropy Loss: [1.9672722 0.772326 ]
Correct: [36.71875 70.625  ]%
Cross Entropy Loss: [1.9486244  0.90571415]
Correct: [36.71875 71.09375]%
Cross Entropy Loss: [1.7228858 0.7370895]
Correct: [42.65625 76.25   ]%
Cross Entropy Loss: [1.8860979 0.7911046]
Correct: [39.84375 72.96875]%
Cross Entropy Loss: [1.8845387 0.8003435]
Correct: [35.78125 72.5    ]%
Cross Entropy Loss: [1.8368202 0.7291575]
Correct: [43.28125 74.84375]%
Cross Entropy Loss: [1.8226898 0.7839356]
Correct: [39.0625 75.    ]%
Cross Entropy Loss: [1.8701004 0.8134883]
Correct: [37.65625 74.53125]%
Cross Entropy Loss: [1.8240659 0.72557  ]
Correct: [39.84375 74.84375]%
Cross Entropy Loss: [1.8214676 0.7448155]
Correct: [36.09375 75.3125 ]%
Cross Entropy Loss: [1.8836753  0.67422974]
Correct: [38.28125 75.9375 ]%
Cross Entropy Loss: [1.9825141 0.7442057]
Correct: [37.1875 73.125 ]%
Cross Entropy Loss: [1.7596476  0.64731306]
Correct: [40.78125 77.5    ]%
Cross Entropy Loss: [1.788497   0.71423346]
Correct: [39.84375 74.21875]%
Cross Entropy Loss: [1.6940105 0.713674 ]
Correct: [43.4375 76.875 ]%
Cross Entropy Loss: [1.8919353  0.75221443]
Correct: [38.28125 73.59375]%
Cross Entropy Loss: [1.8653648 0.7223254]
Correct: [38.59375 72.96875]%
Cross Entropy Loss: [1.7762625 0.7253126]
Correct: [40.      75.15625]%
Cross Entropy Loss: [1.8502035 0.8405493]
Correct: [40.15625 71.71875]%
Cross Entropy Loss: [1.7261009 0.7244376]
Correct: [42.34375 75.78125]%
Cross Entropy Loss: [1.7229208 0.7420155]
Correct: [41.875 75.625]%
Cross Entropy Loss: [1.7158617  0.89575464]
Correct: [40.3125  71.09375]%
Cross Entropy Loss: [1.8110145 0.6980179]
Correct: [40.15625 76.25   ]%
Cross Entropy Loss: [1.923674  0.8904641]
Correct: [37.1875  70.46875]%
Cross Entropy Loss: [1.8107862 0.7424863]
Correct: [43.90625 73.28125]%
Cross Entropy Loss: [1.9413803 0.8221998]
Correct: [33.75    72.96875]%
Cross Entropy Loss: [1.7843673  0.68886673]
Correct: [43.59375 76.40625]%
Cross Entropy Loss: [1.8600174 0.7531792]
Correct: [39.0625 73.4375]%
Cross Entropy Loss: [1.8952646  0.79529774]
Correct: [41.25   74.0625]%
Cross Entropy Loss: [1.7783006  0.68941295]
Correct: [42.03125 75.78125]%
Cross Entropy Loss: [1.8239352  0.76484615]
Correct: [42.03125 74.6875 ]%
Cross Entropy Loss: [1.7419319 0.6887576]
Correct: [42.1875 75.9375]%
Cross Entropy Loss: [1.848032  0.7497262]
Correct: [39.6875 75.    ]%
Cross Entropy Loss: [1.8140606  0.71603686]
Correct: [37.8125 76.5625]%
Cross Entropy Loss: [1.8195035 0.7290523]
Correct: [41.25    76.40625]%
Cross Entropy Loss: [1.6590633 0.6280249]
Correct: [45.3125  77.03125]%
Cross Entropy Loss: [1.7354265 0.6450567]
Correct: [42.5    77.8125]%
Cross Entropy Loss: [1.6807718 0.7894667]
Correct: [43.4375 73.125 ]%
Cross Entropy Loss: [1.9804819 0.886474 ]
Correct: [37.1875  71.40625]%
Cross Entropy Loss: [1.7584362  0.82375383]
Correct: [41.40625 73.90625]%
Cross Entropy Loss: [1.7950141  0.79684573]
Correct: [37.34375 73.59375]%
Cross Entropy Loss: [1.7892345  0.82098883]
Correct: [41.40625 73.125  ]%
Cross Entropy Loss: [1.8129139 0.7169464]
Correct: [40.625   75.15625]%
Cross Entropy Loss: [1.7879629 0.6322259]
Correct: [40.      78.28125]%
Cross Entropy Loss: [1.698695  0.6543917]
Correct: [42.65625 79.0625 ]%
Cross Entropy Loss: [1.6848471 0.6388111]
Correct: [43.4375  78.28125]%
Cross Entropy Loss: [1.7202164 0.6100524]
Correct: [42.65625 78.59375]%
Cross Entropy Loss: [1.7925708 0.6877915]
Correct: [42.5 77.5]%
Cross Entropy Loss: [1.8086828  0.74431163]
Correct: [39.53125 76.71875]%
Cross Entropy Loss: [1.9524109  0.80411565]
Correct: [37.8125  72.34375]%
Cross Entropy Loss: [1.755463  0.7266985]
Correct: [42.8125 78.4375]%
Cross Entropy Loss: [1.9463917 0.8478818]
Correct: [38.90625 70.3125 ]%
Cross Entropy Loss: [1.795377  0.7209218]
Correct: [40.78125 77.34375]%
Cross Entropy Loss: [1.7322928 0.743986 ]
Correct: [43.125 75.625]%
Cross Entropy Loss: [1.9727443 0.8645293]
Correct: [37.34375 73.28125]%
Cross Entropy Loss: [1.6294184  0.58596414]
Correct: [44.375  80.3125]%
Cross Entropy Loss: [1.87487    0.71413624]
Correct: [37.96875 77.65625]%
Cross Entropy Loss: [1.7748121 0.7535546]
Correct: [39.0625 75.    ]%
Cross Entropy Loss: [1.7809922  0.73761594]
Correct: [41.71875 76.71875]%
Cross Entropy Loss: [2.0449069 0.826884 ]
Correct: [34.21875 72.34375]%
Cross Entropy Loss: [1.973946   0.81610805]
Correct: [36.25    74.53125]%
Cross Entropy Loss: [1.8532118 0.7237357]
Correct: [41.71875 75.15625]%
Cross Entropy Loss: [1.7501322 0.8349835]
Correct: [40.625 70.625]%
Cross Entropy Loss: [1.7213131 0.6886469]
Correct: [44.6875 76.25  ]%
Cross Entropy Loss: [1.8186115  0.68320763]
Correct: [42.34375 77.34375]%
Cross Entropy Loss: [1.8207314 0.7635733]
Correct: [41.09375 74.84375]%
Cross Entropy Loss: [1.6341416  0.64564574]
Correct: [44.375  77.1875]%
Cross Entropy Loss: [1.7995743  0.81821936]
Correct: [45.     72.8125]%
Cross Entropy Loss: [1.6712196  0.80211383]
Correct: [41.09375 74.21875]%
Cross Entropy Loss: [2.0123973  0.79460263]
Correct: [35.625   72.34375]%
Cross Entropy Loss: [1.98172    0.85590297]
Correct: [38.28125 70.625  ]%
Cross Entropy Loss: [1.6283687 0.5534599]
Correct: [43.90625 80.     ]%
Cross Entropy Loss: [1.9331211  0.69911397]
Correct: [36.71875 75.3125 ]%
Cross Entropy Loss: [1.8644075  0.67485905]
Correct: [40.3125  76.40625]%
Cross Entropy Loss: [1.705466  0.5917467]
Correct: [42.03125 79.84375]%
Cross Entropy Loss: [1.8873714 0.7320221]
Correct: [39.6875 75.    ]%
Cross Entropy Loss: [1.8719769  0.87320125]
Correct: [38.4375 70.    ]%
Cross Entropy Loss: [1.809489   0.78212976]
Correct: [39.21875 72.96875]%
Cross Entropy Loss: [1.9043369 0.8356202]
Correct: [37.8125  72.96875]%
Cross Entropy Loss: [1.8085178 0.8614172]
Correct: [42.96875 72.1875 ]%
Cross Entropy Loss: [2.0720596 0.8684599]
Correct: [34.6875  71.40625]%
Cross Entropy Loss: [1.8788372 0.7722525]
Correct: [36.25    73.59375]%
Cross Entropy Loss: [1.7210352 0.7348361]
Correct: [40.      75.78125]%
Cross Entropy Loss: [1.8855873 0.7238193]
Correct: [37.65625 75.625  ]%
Cross Entropy Loss: [1.9413977  0.73235035]
Correct: [40.3125 75.    ]%
Cross Entropy Loss: [1.6164252  0.64465356]
Correct: [46.875   77.03125]%
Cross Entropy Loss: [1.9112183 0.8798522]
Correct: [41.875   71.71875]%
Cross Entropy Loss: [1.782131  0.7429001]
Correct: [42.03125 74.84375]%
Cross Entropy Loss: [1.7988346 0.864394 ]
Correct: [40.9375 71.5625]%
Cross Entropy Loss: [1.8505834  0.76912296]
Correct: [37.5  73.75]%
Cross Entropy Loss: [1.6914032 0.6178977]
Correct: [44.84375 78.125  ]%
Cross Entropy Loss: [1.8373314 0.604564 ]
Correct: [38.59375 78.90625]%
Cross Entropy Loss: [1.9020361 0.8240668]
Correct: [37.34375 70.78125]%
Cross Entropy Loss: [1.8441069 0.7894813]
Correct: [39.0625 74.375 ]%
Cross Entropy Loss: [1.8611752  0.78715086]
Correct: [40.    75.625]%
Cross Entropy Loss: [1.8893249  0.72640985]
Correct: [42.03125 76.71875]%
Cross Entropy Loss: [1.9400431 0.7556305]
Correct: [38.90625 74.21875]%
Cross Entropy Loss: [1.671291   0.63234997]
Correct: [43.28125 76.40625]%
Cross Entropy Loss: [1.8625349 0.7403159]
Correct: [41.71875 74.6875 ]%
Cross Entropy Loss: [1.8208174  0.69469976]
Correct: [41.40625 75.9375 ]%
Cross Entropy Loss: [1.7945637  0.62442476]
Correct: [40.46875 77.03125]%
Cross Entropy Loss: [1.7649858 0.618971 ]
Correct: [39.6875  79.53125]%
Cross Entropy Loss: [1.9157734  0.83062154]
Correct: [39.6875  72.34375]%
Cross Entropy Loss: [1.7623132 0.661338 ]
Correct: [42.1875 76.875 ]%
Cross Entropy Loss: [1.7633638  0.78048056]
Correct: [43.75  75.625]%
Cross Entropy Loss: [1.866252  0.7037738]
Correct: [37.34375 76.09375]%
Cross Entropy Loss: [1.8208268  0.68361837]
Correct: [38.90625 76.09375]%
Cross Entropy Loss: [1.899427  0.7951032]
Correct: [38.28125 73.28125]%
Cross Entropy Loss: [1.6442044  0.61344874]
Correct: [41.875  78.4375]%
Cross Entropy Loss: [1.7455423 0.7364494]
Correct: [40.3125 75.    ]%
Cross Entropy Loss: [1.6499659  0.62427586]
Correct: [46.71875 79.0625 ]%
Cross Entropy Loss: [1.8366671 0.6956587]
Correct: [39.375 74.375]%
Cross Entropy Loss: [1.792592  0.6433785]
Correct: [42.65625 78.75   ]%
Cross Entropy Loss: [1.6961849 0.639775 ]
Correct: [45.15625 78.59375]%
Cross Entropy Loss: [1.9530313 0.8281373]
Correct: [36.25  73.125]%
Cross Entropy Loss: [1.6839311 0.6581014]
Correct: [44.375 78.75 ]%
Cross Entropy Loss: [1.8284962  0.73124135]
Correct: [38.4375  77.03125]%
Cross Entropy Loss: [1.7353718  0.71527773]
Correct: [42.8125 77.5   ]%
Cross Entropy Loss: [1.6914799 0.7332767]
Correct: [42.5     76.09375]%
Cross Entropy Loss: [1.724438   0.70617294]
Correct: [40.9375 75.625 ]%
Cross Entropy Loss: [1.9644893  0.80685157]
Correct: [38.4375  73.28125]%
Cross Entropy Loss: [1.8436058  0.72932833]
Correct: [41.09375 74.84375]%
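The per-epoch printout above is easier to judge as a smoothed curve. A minimal visualization sketch: the accuracy trace below is synthetic stand-in data (hypothetical values shaped roughly like the layer-1 column of the log), only to illustrate the Savitzky-Golay smoothing step; with the real values collected into an array, the same two calls apply.

In [ ]:
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import savgol_filter

# Synthetic stand-in for one accuracy column of the log above (hypothetical values).
rng = np.random.default_rng(0)
epochs = np.arange(700)
acc_layer1 = 64 + 12 * (1 - np.exp(-epochs / 250)) + rng.normal(0, 2.0, epochs.size)

# Savitzky-Golay smoothing: window_length must be odd and <= len(acc_layer1)
smoothed = savgol_filter(acc_layer1, window_length=51, polyorder=3)

plt.plot(epochs, acc_layer1, alpha=0.3, label='per-epoch accuracy (raw)')
plt.plot(epochs, smoothed, label='smoothed')
plt.xlabel('epoch')
plt.ylabel('accuracy [%]')
plt.legend()
plt.show()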
In [ ]:
if not args.augment:
    # if snn_samples already exist, don't recompute
    try:
        snn_samples
    except NameError:
        snn_samples, targets = get_samples(SNN, train_loader, args.n_hidden, device)
    cat = False
    test_accs = []
    train_accs = []
    for i in range(10):
        with torch.no_grad():
            out_projs, acc, losses_out = train_out_proj_fast(SNN, args, 50, 60, snn_samples, train_loader.y, cat=cat, lr=3e-4, weight_decay=0)
        print('Mean abs weights', out_projs[-1].out_proj.weight.abs().mean())
        test_accs.append(get_accuracy(SNN, out_projs, test_loader, device, cat=cat)[0])
        train_accs.append(get_accuracy(SNN, out_projs, train_loader, device, cat=cat)[0])
    test_accs = torch.stack([torch.tensor(ta) for ta in test_accs])
    train_accs = torch.stack([torch.tensor(ta) for ta in train_accs])
    print(f'Fast Classifier Mean Test Accuracy: {100*torch.mean(test_accs, dim=0)}, Std: {100*torch.std(test_accs, dim=0)}')
    print(f'Fast Classifier Mean Train Accuracy: {100*torch.mean(train_accs, dim=0)}, Std: {100*torch.std(train_accs, dim=0)}')
Cross Entropy Loss: [3.8253505 3.0756338]
Correct: [18.19519372 26.15252575]%
[... 50 epochs of alternating "Cross Entropy Loss" / "Correct" lines elided: both losses fall steadily and the accuracies of the (input, layer 1) readouts climb to roughly [79, 99]% on the training samples ...]
Cross Entropy Loss: [0.61500025 0.069534  ]
Correct: [78.8131437  98.71260422]%
Mean abs weights tensor(0.0364, grad_fn=<MeanBackward0>)
Directly from inputs:
Accuracy: 45.23%
From layer 1:
Accuracy: 64.66%
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
[... full stack trace elided: the cell was interrupted by hand while get_accuracy(SNN, out_projs, train_loader, ...) was stepping through the training set (EchoSpike.forward -> EchoSpike_layer.forward -> snntorch Leaky.forward), so the cell's mean/std summary over the repetitions was never printed ...]
KeyboardInterrupt: 
In [ ]:
# run all checkpoints
if not args.augment:
    last_ckp = 20
    test_accs_ckpt = []
    train_accs_ckpt = []
    epochs = []
    while True:
        print(f'Checkpoint {last_ckp}')
        SNN_ckp = EchoSpike(args.n_inputs, args.n_hidden, beta=args.beta, device=device, recurrency_type=args.recurrency_type, online=args.online).to(device)
        try:
            print(model_name[:-3] + f'_epoch{last_ckp}.pt')
            state_dict = torch.load(model_name[:-3] + f'_epoch{last_ckp}.pt', map_location=device)
            # state_dict = {key.replace('clapp', 'layers'):value for key, value in state_dict.items()}
            # torch.save(state_dict, model_name + f'_epoch{last_ckp}.pt')
            SNN_ckp.load_state_dict(state_dict)
        except FileNotFoundError:
            # checkpoint file missing: stop once past the last expected epoch, otherwise skip ahead
            if last_ckp > 1500:
                break
            else:
                last_ckp += 20
                continue
        epochs.append(last_ckp)
        last_ckp += 20
        # train a linear readout on the frozen checkpoint and evaluate it on both splits
        snn_samples, targets = get_samples(SNN_ckp, train_loader, args.n_hidden, device)
        cat = True
        with torch.no_grad():
            out_projs, acc, losses_out = train_out_proj_fast(SNN_ckp, args, 60, 60, snn_samples, train_loader.y, cat=cat, lr=1e-4, weight_decay=1)
        test_accs_ckpt.append(torch.tensor(get_accuracy(SNN_ckp, out_projs, test_loader, device, cat=cat)[0]))
        train_accs_ckpt.append(torch.tensor(get_accuracy(SNN_ckp, out_projs, train_loader, device, cat=cat)[0]))
    test_accs_ckpt = torch.stack(test_accs_ckpt)
    train_accs_ckpt = torch.stack(train_accs_ckpt)
    # save the results (the tensors are already stacked above)
    torch.save(test_accs_ckpt, model_name[:-3] + '_test_accs_ckpt.pt')
    torch.save(train_accs_ckpt, model_name[:-3] + '_train_accs_ckpt.pt')
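Re-running the checkpoint sweep is slow; if the tensors saved above already exist on disk they can simply be reloaded. This is a small convenience sketch, assuming the `*_test_accs_ckpt.pt` / `*_train_accs_ckpt.pt` files written by the previous cell are present and that no checkpoint was skipped, so the epoch axis can be rebuilt from the 20-epoch spacing:

In [ ]:
# Optional: reload a previously saved checkpoint sweep instead of re-running it
if not args.augment:
    test_accs_ckpt = torch.load(model_name[:-3] + '_test_accs_ckpt.pt')
    train_accs_ckpt = torch.load(model_name[:-3] + '_train_accs_ckpt.pt')
    # checkpoints were evaluated every 20 epochs; rebuild the epoch axis (assumes none were skipped)
    epochs = [20 * (i + 1) for i in range(len(test_accs_ckpt))]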
In [ ]:
# plot train and test accuracy over time
plt.figure()
for i in range(test_accs_ckpt.shape[-1]):
    label = 'Directly from inputs' if i == 0 else f'Layer {i}'
    # solid: test accuracy, dashed: train accuracy
    plt.plot(epochs, 100*test_accs_ckpt[:, i], color=color_list[i], label=label)
    plt.plot(epochs, 100*train_accs_ckpt[:, i], color=color_list[i], linestyle='--')
plt.xlabel('Epoch')
plt.ylabel('Accuracy [%]')
plt.legend()
Out[ ]:
<matplotlib.legend.Legend at 0x7f4d60de9850>
In [ ]:
if not args.augment:
    # if snn_samples already exists from an earlier cell, don't recompute it
    try:
        snn_samples
    except NameError:
        snn_samples, targets = get_samples(SNN, train_loader, args.n_hidden, device)
    cat = False
    out_projs_closed = train_out_proj_closed_form(args, snn_samples, targets, cat=cat)
    test_acc_closed, _ = get_accuracy(SNN, out_projs_closed, test_loader, device, cat=cat)
    train_acc_closed, _ = get_accuracy(SNN, out_projs_closed, train_loader, device, cat=cat)

    # grouped Bar plot the Accuracies of the different layers both during training and testing
    sns.set_theme(style="whitegrid")
    labels = ['From Inputs Directly', *[f'Until Layer {i+1}' for i in range(len(SNN.layers))]]
    if not cat:
        labels = ['From Inputs Directly', *[f'From Layer {i+1}' for i in range(len(SNN.layers))]]
    x = np.arange(len(labels))  # the label locations
    width = 0.35  # the width of the bars
    fig, ax = plt.subplots()
    rects1 = ax.bar(x - width/2, 100*torch.tensor(test_acc_closed), width, label='Test Accuracy', color=color_list[0])
    rects2 = ax.bar(x + width/2, 100*torch.tensor(train_acc_closed), width, label='Train Accuracy', color=color_list[1])
    # remove horizontal lines and spines
    ax.spines['right'].set_visible(False)
    ax.spines['left'].set_visible(False)
    ax.xaxis.grid(False)
    plt.xticks(np.arange(len(out_projs_closed)), labels, rotation=45)
    plt.legend()
    plt.ylabel('Accuracy [%]')
    plt.ylim([25, 100])
(20, 700) 0.010724677 -0.010629931
(20, 450) 0.074521676 -0.04809181
(20, 450) 0.46014944 -0.23917717
(20, 450) 0.16177498 -0.18633702
(20, 450) 0.84205234 -0.84336966
Test set (36 batches):
Directly from inputs:
Accuracy: 28.89%
From layer 1:
Accuracy: 56.85%
From layer 2:
Accuracy: 64.53%
From layer 3:
Accuracy: 68.20%
From layer 4:
Accuracy: 66.65%
Train set (128 batches):
Directly from inputs:
Accuracy: 66.96%
From layer 1:
Accuracy: 81.18%
From layer 2:
Accuracy: 88.25%
From layer 3:
Accuracy: 90.77%
From layer 4:
Accuracy: 86.13%
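train_out_proj_closed_form is used above as a black box. A closed-form linear readout of this kind is usually a regularised least-squares (ridge-regression) fit of the accumulated spike counts onto one-hot targets, W = (XᵀX + λI)⁻¹XᵀY. The sketch below only illustrates that idea; the function name, shapes and regularisation strength are assumptions, not the actual implementation in utils.py.

In [ ]:
# Illustrative ridge-regression readout (not the utils.py implementation)
# X: (n_samples, n_features) accumulated spike counts, y: (n_samples,) integer class labels
def closed_form_readout(X, y, n_classes=20, lam=1.0):
    Y = torch.nn.functional.one_hot(y.long(), n_classes).float()  # one-hot targets
    A = X.T @ X + lam * torch.eye(X.shape[1])                     # regularised Gram matrix
    return torch.linalg.solve(A, X.T @ Y)                         # weights, shape (n_features, n_classes)

# usage on hypothetical features: preds = (X_test @ W).argmax(dim=1)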
In [ ]:
if args.augment:
    # plot some training characteristics
    print(f'Accuracy of last quarter: {100*acc[-len(acc)//4:].mean(axis=0)}%')
    plt.figure()
    for i in range(len(acc[0])):
        plt.plot(np.asarray(acc)[:,i]*100, color=color_list[i])
    plt.ylabel('Accuracy [%]')
    plt.xlabel('Training Step [x500]')
    labels = ['From Inputs directly', *[f'From Layer {i+1}' for i in range(len(SNN.layers))]]
    plt.legend(labels)
    plt.ylim([65, 95])
    plt.figure()
    print(losses_out.shape)
    for i in range(losses_out.shape[1]):
        plt.plot(np.arange(len(losses_out))/len(train_loader), savgol_filter(losses_out[:,i], 99, 1), label=labels[i], color=color_list[i])
    plt.ylabel('Cross Entropy Loss')
    plt.xlabel('Training Step')
    plt.ylim([0.15, 1.0])
    plt.legend();
Accuracy of last quarter: [40.25176332 74.64145768]%
torch.Size([12744, 2])
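A side note on the smoothing above: savgol_filter(..., 99, 1) fits a straight line in a sliding 99-step window, which away from the edges is equivalent to a plain 99-step moving average; it is used purely to make the loss curves readable. A quick illustrative check (the variable names here are made up):

In [ ]:
import numpy as np
from scipy.signal import savgol_filter
x = np.random.rand(500)
sg = savgol_filter(x, 99, 1)                          # Savitzky-Golay, window 99, polynomial order 1
ma = np.convolve(x, np.ones(99) / 99, mode='same')    # centered 99-step moving average
print(np.abs(sg[49:-49] - ma[49:-49]).max())          # ~1e-16: identical away from the edges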

Get output projection accuracy on the test set¶

In [ ]:
test_acc, pred_matrix = get_accuracy(SNN, out_projs, test_loader, device, cat=cat)
plt.figure()
plt.plot(100*np.asarray(test_acc))
plt.ylabel('Accuracy [%]')
plt.xlabel('Layer')

plt.figure()
plt.imshow(pred_matrix, origin='lower')
plt.title('Prediction Matrix for the final layer')
plt.xlabel('Prediction')
plt.ylabel('Target')
plt.xticks([i for i in range(args.n_outputs)])
plt.yticks([i for i in range(args.n_outputs)])
plt.colorbar();
Test set (36 batches):
Directly from inputs:
Accuracy: 45.10%
From layer 1:
Accuracy: 64.93%
In [ ]:
from utils import get_accuracy
if args.augment:
    train_acc, _ = get_accuracy(SNN, out_projs, train_loader, device, cat=cat) 
else:
    test_acc = torch.mean(test_accs, dim=0)
    print(test_acc)
    train_acc = torch.mean(train_accs, dim=0)
# grouped Bar plot the Accuracies of the different layers both during training and testing
sns.set_theme(style="whitegrid")
labels = ['From Inputs Directly', *[f'Until Layer {i+1}' for i in range(len(SNN.layers))]]
if not cat:
    labels = ['From Inputs Directly', *[f'From Layer {i+1}' for i in range(len(SNN.layers))]]
x = np.arange(len(labels))  # the label locations
width = 0.35  # the width of the bars
fig, ax = plt.subplots()
rects1 = ax.bar(x - width/2, 100*torch.tensor(test_acc), width, label='Test Accuracy', color=color_list[0])
rects2 = ax.bar(x + width/2, 100*torch.tensor(train_acc), width, label='Train Accuracy', color=color_list[1])
if not args.augment:
    ax.errorbar(x - width/2, 100*test_acc, yerr=100*torch.std(test_accs, dim=0), fmt='none', capsize=6, color=color_list[3])
    ax.errorbar(x + width/2, 100*train_acc, yerr=100*torch.std(train_accs, dim=0), fmt='none', capsize=6, color=color_list[3])
# remove horizontal lines and spines
ax.spines['right'].set_visible(False)
ax.spines['left'].set_visible(False)
ax.xaxis.grid(False)
plt.xticks(np.arange(len(out_projs)), labels, rotation=45)
plt.legend()
plt.ylabel('Accuracy [%]')
plt.ylim([25, 100])
#plt.title('SHD Accuracy');
Train set (128 batches):
Directly from inputs:
Accuracy: 47.07%
From layer 1:
Accuracy: 78.49%
Out[ ]:
(25.0, 100.0)

Few Shot Learning (discontinued)¶

In [ ]:
# Randomly select k samples of each class and save the spiking activity
n_outputs = 20
n_repeats = 1
k = 20
fewshot_accuracies = torch.zeros((n_repeats, len(SNN.layers)))
for n in range(n_repeats):
    SNN.reset(0)
    one_shot_samples = torch.zeros(n_outputs, n_time_bins, args.n_inputs)
    one_shot_spks = torch.zeros(n_outputs, len(SNN.layers), args.n_hidden[0])
    for i in trange(n_outputs):
        for j in range(k):
            img, _ = train_loader.next_item(i, contrastive=False)
            one_shot_samples[i] = img.squeeze()
            for t in range(n_time_bins):
                logits, mem_his, clapp_loss = SNN(img[t].float(), 0) 
                one_shot_spks[i] += torch.stack(logits).squeeze()

    def metric(spk, one_shot):
        # similarity of each accumulated spike vector to every (sum-normalised) class prototype
        dists = torch.zeros(spk.shape[0], args.n_outputs)
        for i in range(args.n_outputs):
            one_shot_i = one_shot[i] / one_shot[i].sum()
            dists[:, i] = torch.einsum('bi, i->b', spk, one_shot_i)
        return dists

    def get_predictions(spks):
        preds = torch.zeros(len(spks), spks[0].shape[0])
        # for each layer get the prediction
        for i in range(len(spks)):
            dists = metric(spks[i], one_shot_spks[:,i])
            preds[i] = dists.argmax(axis=-1)
        return preds
    dataset = test_loader
    batch = int(len(dataset)/100)
    correct_oneshot = torch.zeros(len(SNN.layers))
    SNN.eval()
    pred_matrix_oneshot = torch.zeros(n_outputs, n_outputs)
    for idx in trange(0, len(dataset), batch):
        SNN.reset(0)
        inp, target = dataset.x[idx:idx+batch], dataset.y[idx:idx+batch]
        logits = torch.zeros(len(SNN.layers), inp.shape[0], args.n_hidden[0])
        for step in range(inp.shape[1]):
            data_step = inp[:,step].float().to(device)
            spk_step, _, _ = SNN(data_step, 0)
            logits += torch.stack(spk_step)
        preds = get_predictions(logits)
        for i in range(preds.shape[0]):
            correct_oneshot[i] += int((preds[i] == target).sum())
        # for the last layer create the prediction matrix
        for j in range(preds.shape[1]):
            pred_matrix_oneshot[int(target[j]), int(preds[-1, j])] += 1
    correct_oneshot /= len(dataset)
    for i in range(len(SNN.layers)):
        print(f'From layer {i+1}:')
        print(f'Accuracy: {100*correct_oneshot[i]:.2f}%')
        fewshot_accuracies[n, i] = correct_oneshot[i]
    plt.imshow(pred_matrix_oneshot, origin='lower')
    plt.title('Prediction Matrix for the final layer')
    plt.xlabel('Prediction')
    plt.ylabel('Target')
    plt.xticks([i for i in range(n_outputs)])
    plt.yticks([i for i in range(n_outputs)])
    plt.colorbar();
    plt.figure()
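As a quick sanity check of the prototype rule used in the cell above (sum-normalise each class prototype, then assign a sample to the prototype with the largest dot product), the same logic can be exercised on toy data. This is purely illustrative; the shapes and numbers are made up:

In [ ]:
# Toy check of the prototype-matching rule (illustrative only)
proto = torch.rand(20, 450)                         # one random "spike count" prototype per class
proto = proto / proto.sum(dim=1, keepdim=True)      # sum-normalise, as in metric()
sample = 500 * proto[7] + torch.rand(450)           # a noisy sample that resembles class 7
scores = proto @ sample                             # dot product with every prototype
print(scores.argmax())                              # expected: tensor(7)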
In [ ]:
# Boxplot of the accuracies
plt.figure()
sns.set_style("whitegrid")
g = sns.boxplot(data=fewshot_accuracies*100)
# remove left spines
sns.despine(left=True)
plt.xticks(np.arange(len(SNN.layers)), [f'Layer {i+1}' for i in range(len(SNN.layers))])
plt.ylabel('Few-Shot Test Accuracy [%]')
plt.ylim([0, 100])
print(f'Average Accuracy: {100*fewshot_accuracies.mean(axis=0)}%')
print(f'Maximum Accuracy: {100*fewshot_accuracies.max(axis=0).values}%')